3. Train-Predict

Result:

  • Kaggle score:

Tensorboard


In [1]:
import time
import os
import pandas as pd

# Identify this run: <project>_<step>_<timestamp> is used as a unique name
# for TensorBoard log directories and output files.
project_name = 'Dog_Breed_Identification'
step_name = 'Train-Predict'
time_str = time.strftime("%Y%m%d_%H%M%S", time.localtime())
run_name = '_'.join([project_name, step_name, time_str])
print('run_name: ' + run_name)

# All working directories live under the notebook's CWD.
cwd = os.getcwd()
log_path = os.path.join(cwd, 'log')
model_path = os.path.join(cwd, 'model')
output_path = os.path.join(cwd, 'output')
for label, path in (('log_path', log_path),
                    ('model_path', model_path),
                    ('output_path', output_path)):
    print('%s: \t%s' % (label, path))


run_name: Dog_Breed_Identification_Train-Predict_20180223_134540
log_path: 	D:\Udacity\MachineLearning(Advanced)\p6_graduation_project\log
model_path: 	D:\Udacity\MachineLearning(Advanced)\p6_graduation_project\model
output_path: 	D:\Udacity\MachineLearning(Advanced)\p6_graduation_project\output

In [2]:
# Load the Kaggle-provided training labels (image id -> breed name).
df = pd.read_csv(os.path.join(cwd, 'input', 'labels.csv'))
# Fix: message previously read 'lables amount' (typo in user-facing output).
print('labels amount: %d' % len(df))
df.head()


lables amount: 10222
Out[2]:
id breed
0 000bec180eb18c7604dcecc8fe0dba07 boston_bull
1 001513dfcb2ffafc82cccf4d8bbaba97 dingo
2 001cdf01b096e06d78e9e5112d419397 pekinese
3 00214f311d5d2247d5dfe4fe24b2303d bluetick
4 0021f9ceb3235effd7fcde7f7538ed62 golden_retriever

In [3]:
# Load pre-extracted bottleneck features (saved by an earlier feature-extraction
# step) from several CNN backbones and concatenate them along the feature axis.
import h5py
import numpy as np
from sklearn.utils import shuffle
np.random.seed(2017)  # seed NumPy's global RNG so the later shuffle is reproducible

x_train = []  # per-backbone train feature arrays; concatenated below
y_train = {}  # NOTE(review): initialized as dict but replaced by an ndarray in the loop
x_val = []    # per-backbone validation feature arrays
y_val = {}    # NOTE(review): same dict-then-ndarray pattern as y_train
x_test = []   # per-backbone test feature arrays

cwd = os.getcwd()
# feature_cgg16 = os.path.join(cwd, 'model', 'feature_VGG16_{}.h5'.format(20180219))
# feature_cgg19 = os.path.join(cwd, 'model', 'feature_VGG19_{}.h5'.format(20180219))
# feature_resnet50 = os.path.join(cwd, 'model', 'feature_ResNet50_{}.h5'.format(20180220))
feature_xception = os.path.join(cwd, 'model', 'feature_Xception_{}.h5'.format(20180221))
feature_inception = os.path.join(cwd, 'model', 'feature_InceptionV3_{}.h5'.format(20180221))
feature_inceptionResNetV2 = os.path.join(cwd, 'model', 'feature_InceptionResNetV2_{}.h5'.format(20180221))
# for filename in [feature_cgg16, feature_cgg19, feature_resnet50, feature_xception, feature_inception, feature_inceptionResNetV2]:
for filename in [feature_xception, feature_inception, feature_inceptionResNetV2]:
# for filename in [feature_inception]:
    with h5py.File(filename, 'r') as h:
        # Features are appended per backbone; labels are OVERWRITTEN each
        # iteration — assumes every feature file stores the samples (and hence
        # labels) in the same order. TODO(review): confirm that assumption.
        x_train.append(np.array(h['train']))
        y_train = np.array(h['train_labels'])
        x_val.append(np.array(h['val']))
        y_val = np.array(h['val_labels'])
        x_test.append(np.array(h['test']))

# print(x_train[0].shape)
# Stack the backbones' feature vectors side by side (axis=-1) into one wide
# feature matrix per split.
x_train = np.concatenate(x_train, axis=-1)
# y_train = np.concatenate(y_train, axis=0)
x_val = np.concatenate(x_val, axis=-1)
# y_val = np.concatenate(y_val, axis=0)
x_test = np.concatenate(x_test, axis=-1)
print(x_train.shape)
print(x_train.shape[1:])
print('-' * 10)

# Sanity check: feature/label counts per split.
print(x_train.shape)
print(len(y_train))
print(x_val.shape)
print(len(y_val))
print(x_test.shape)


(30290, 5632)
(5632,)
----------
(30290, 5632)
30290
(512, 5632)
512
(10357, 5632)

In [4]:
from sklearn.utils import shuffle

# Shuffle features and labels together (same permutation for both).
# Uses NumPy's global RNG, seeded earlier with np.random.seed(2017).
x_train, y_train = shuffle(x_train, y_train)

In [5]:
from sklearn.model_selection import train_test_split

# The val split was already produced upstream; uncomment to re-split here:
# x_train, x_val, y_train, y_val = train_test_split(x_train, y_train, test_size=0.025, random_state=5)
for split in (x_train, y_train, x_val, y_val):
    print(split.shape)


(30290, 5632)
(30290,)
(512, 5632)
(512,)

In [6]:
from keras.utils.np_utils import to_categorical

# One-hot encode the integer breed labels for categorical cross-entropy.
y_train = to_categorical(y_train)
y_val = to_categorical(y_val)
for encoded in (y_train, y_val):
    print(encoded.shape)


Using TensorFlow backend.
(30290, 120)
(512, 120)

Build CNN


In [7]:
from sklearn.metrics import confusion_matrix

from keras.utils.np_utils import to_categorical # convert to one-hot-encoding
from keras.models import Sequential
from keras.layers import Dense, Dropout, Input, Flatten, Conv2D, MaxPooling2D, BatchNormalization
from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import LearningRateScheduler, TensorBoard

In [8]:
def get_lr(x):
    """Learning-rate schedule: exponential decay 3e-4 * 0.96**epoch.

    The rate is rounded to 10 decimal places and floored at 1e-8 so it
    never underflows to zero. Prints the rate (for the training log) and
    returns it for Keras' LearningRateScheduler.
    """
    rate = max(round(3e-4 * 0.96 ** x, 10), 1e-8)
    print('%.12f' % rate, end='  ')
    return rate

# Decay the learning rate every epoch according to get_lr.
# (Earlier experiment: lambda x: 1e-3 * 0.9 ** x)
annealer = LearningRateScheduler(get_lr)

# One TensorBoard log directory per run, named after run_name.
log_dir = os.path.join(log_path, run_name)
print('log_dir:%s' % log_dir)
tensorBoard = TensorBoard(log_dir=log_dir)


log_dir:D:\Udacity\MachineLearning(Advanced)\p6_graduation_project\log\Dog_Breed_Identification_Train-Predict_20180223_134540

In [9]:
# MLP head on top of the concatenated bottleneck features.
# NOTE(review): the first Dense has no activation (linear) — confirm intended.
model = Sequential([
    Dense(4096, input_shape=x_train.shape[1:]),
    Dropout(0.3),
    Dense(4096, activation='sigmoid'),
    Dropout(0.3),
    Dense(4096, activation='sigmoid'),
    Dropout(0.3),
    Dense(120, activation='softmax'),  # 120 dog breeds
])

model.compile(optimizer=Adam(lr=1e-4),
              loss='categorical_crossentropy',
              metrics=['accuracy'])

In [10]:
# Print the layer-by-layer architecture and parameter counts (~57M params).
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_1 (Dense)              (None, 4096)              23072768  
_________________________________________________________________
dropout_1 (Dropout)          (None, 4096)              0         
_________________________________________________________________
dense_2 (Dense)              (None, 4096)              16781312  
_________________________________________________________________
dropout_2 (Dropout)          (None, 4096)              0         
_________________________________________________________________
dense_3 (Dense)              (None, 4096)              16781312  
_________________________________________________________________
dropout_3 (Dropout)          (None, 4096)              0         
_________________________________________________________________
dense_4 (Dense)              (None, 120)               491640    
=================================================================
Total params: 57,127,032
Trainable params: 57,127,032
Non-trainable params: 0
_________________________________________________________________

In [11]:
# Train the head on the pre-computed features.
# Fix: the TensorBoard callback (tensorBoard) was constructed earlier but never
# passed to fit, so no TensorBoard logs were written despite the notebook's
# "Tensorboard" section — include it alongside the LR annealer.
hist = model.fit(x_train, y_train,
                 batch_size=256,
                 epochs=300, #Increase this when not on Kaggle kernel
                 verbose=1,  #1 for ETA, 0 for silent
                 callbacks=[annealer, tensorBoard],
                 validation_data=(x_val, y_val))


Train on 30290 samples, validate on 512 samples
0.000300000000  Epoch 1/300
30290/30290 [==============================] - 103s 3ms/step - loss: 1.2248 - acc: 0.7437 - val_loss: 0.3038 - val_acc: 0.9043
0.000288000000  Epoch 2/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.2692 - acc: 0.9208 - val_loss: 0.2561 - val_acc: 0.9297
0.000276480000  Epoch 3/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.2186 - acc: 0.9336 - val_loss: 0.2057 - val_acc: 0.9395
0.000265420800  Epoch 4/300
30290/30290 [==============================] - 103s 3ms/step - loss: 0.1937 - acc: 0.9409 - val_loss: 0.2051 - val_acc: 0.9375
0.000254804000  Epoch 5/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.1690 - acc: 0.9469 - val_loss: 0.1798 - val_acc: 0.9512
0.000244611800  Epoch 6/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.1460 - acc: 0.9538 - val_loss: 0.1688 - val_acc: 0.9473
0.000234827300  Epoch 7/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.1253 - acc: 0.9586 - val_loss: 0.1412 - val_acc: 0.9473
0.000225434200  Epoch 8/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.1120 - acc: 0.9647 - val_loss: 0.1518 - val_acc: 0.9531
0.000216416900  Epoch 9/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.1045 - acc: 0.9662 - val_loss: 0.1568 - val_acc: 0.9512
0.000207760200  Epoch 10/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0895 - acc: 0.9714 - val_loss: 0.1227 - val_acc: 0.9590
0.000199449800  Epoch 11/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0779 - acc: 0.9753 - val_loss: 0.1144 - val_acc: 0.9609
0.000191471800  Epoch 12/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0703 - acc: 0.9771 - val_loss: 0.0809 - val_acc: 0.9727
0.000183812900  Epoch 13/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0623 - acc: 0.9786 - val_loss: 0.0820 - val_acc: 0.9707
0.000176460400  Epoch 14/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0580 - acc: 0.9810 - val_loss: 0.0766 - val_acc: 0.9727
0.000169402000  Epoch 15/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0471 - acc: 0.9848 - val_loss: 0.0791 - val_acc: 0.9707
0.000162625900  Epoch 16/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0431 - acc: 0.9860 - val_loss: 0.0563 - val_acc: 0.9844
0.000156120900  Epoch 17/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0396 - acc: 0.9870 - val_loss: 0.0630 - val_acc: 0.9824
0.000149876000  Epoch 18/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0373 - acc: 0.9877 - val_loss: 0.0572 - val_acc: 0.9844
0.000143881000  Epoch 19/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0341 - acc: 0.9883 - val_loss: 0.0386 - val_acc: 0.9863
0.000138125800  Epoch 20/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0281 - acc: 0.9909 - val_loss: 0.0445 - val_acc: 0.9883
0.000132600700  Epoch 21/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0267 - acc: 0.9909 - val_loss: 0.0433 - val_acc: 0.9844
0.000127296700  Epoch 22/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0226 - acc: 0.9930 - val_loss: 0.0372 - val_acc: 0.9824
0.000122204800  Epoch 23/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0226 - acc: 0.9930 - val_loss: 0.0393 - val_acc: 0.9844
0.000117316600  Epoch 24/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0203 - acc: 0.9935 - val_loss: 0.0182 - val_acc: 0.9941
0.000112624000  Epoch 25/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0186 - acc: 0.9935 - val_loss: 0.0336 - val_acc: 0.9863
0.000108119000  Epoch 26/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0194 - acc: 0.9938 - val_loss: 0.0352 - val_acc: 0.9883
0.000103794300  Epoch 27/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0174 - acc: 0.9945 - val_loss: 0.0303 - val_acc: 0.9902
0.000099642500  Epoch 28/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0176 - acc: 0.9941 - val_loss: 0.0258 - val_acc: 0.9883
0.000095656800  Epoch 29/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0133 - acc: 0.9959 - val_loss: 0.0163 - val_acc: 0.9941
0.000091830500  Epoch 30/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0121 - acc: 0.9962 - val_loss: 0.0329 - val_acc: 0.9922
0.000088157300  Epoch 31/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0126 - acc: 0.9963 - val_loss: 0.0218 - val_acc: 0.9902
0.000084631000  Epoch 32/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0113 - acc: 0.9967 - val_loss: 0.0178 - val_acc: 0.9961
0.000081245800  Epoch 33/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0124 - acc: 0.9964 - val_loss: 0.0173 - val_acc: 0.9941
0.000077995900  Epoch 34/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0107 - acc: 0.9966 - val_loss: 0.0249 - val_acc: 0.9883
0.000074876100  Epoch 35/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0108 - acc: 0.9968 - val_loss: 0.0175 - val_acc: 0.9941
0.000071881000  Epoch 36/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0103 - acc: 0.9966 - val_loss: 0.0164 - val_acc: 0.9961
0.000069005800  Epoch 37/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0103 - acc: 0.9971 - val_loss: 0.0133 - val_acc: 0.9941
0.000066245600  Epoch 38/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0088 - acc: 0.9973 - val_loss: 0.0234 - val_acc: 0.9941
0.000063595800  Epoch 39/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0089 - acc: 0.9973 - val_loss: 0.0169 - val_acc: 0.9941
0.000061051900  Epoch 40/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0081 - acc: 0.9973 - val_loss: 0.0200 - val_acc: 0.9922
0.000058609800  Epoch 41/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0086 - acc: 0.9972 - val_loss: 0.0188 - val_acc: 0.9941
0.000056265500  Epoch 42/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0083 - acc: 0.9975 - val_loss: 0.0108 - val_acc: 0.9941
0.000054014800  Epoch 43/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0075 - acc: 0.9980 - val_loss: 0.0190 - val_acc: 0.9961
0.000051854200  Epoch 44/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0076 - acc: 0.9976 - val_loss: 0.0136 - val_acc: 0.9941
0.000049780100  Epoch 45/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0081 - acc: 0.9974 - val_loss: 0.0118 - val_acc: 0.9941
0.000047788900  Epoch 46/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0075 - acc: 0.9976 - val_loss: 0.0232 - val_acc: 0.9941
0.000045877300  Epoch 47/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0069 - acc: 0.9977 - val_loss: 0.0174 - val_acc: 0.9941
0.000044042200  Epoch 48/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0069 - acc: 0.9976 - val_loss: 0.0117 - val_acc: 0.9961
0.000042280500  Epoch 49/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0071 - acc: 0.9976 - val_loss: 0.0138 - val_acc: 0.9902
0.000040589300  Epoch 50/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0069 - acc: 0.9977 - val_loss: 0.0140 - val_acc: 0.9922
0.000038965700  Epoch 51/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0063 - acc: 0.9979 - val_loss: 0.0160 - val_acc: 0.9941
0.000037407100  Epoch 52/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0059 - acc: 0.9978 - val_loss: 0.0073 - val_acc: 0.9980
0.000035910800  Epoch 53/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0061 - acc: 0.9979 - val_loss: 0.0076 - val_acc: 0.9980
0.000034474400  Epoch 54/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0058 - acc: 0.9981 - val_loss: 0.0120 - val_acc: 0.9961
0.000033095400  Epoch 55/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0058 - acc: 0.9980 - val_loss: 0.0135 - val_acc: 0.9961
0.000031771600  Epoch 56/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0059 - acc: 0.9977 - val_loss: 0.0091 - val_acc: 0.9961
0.000030500700  Epoch 57/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0068 - acc: 0.9976 - val_loss: 0.0128 - val_acc: 0.9961
0.000029280700  Epoch 58/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0056 - acc: 0.9979 - val_loss: 0.0109 - val_acc: 0.9941
0.000028109500  Epoch 59/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0057 - acc: 0.9979 - val_loss: 0.0156 - val_acc: 0.9941
0.000026985100  Epoch 60/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0050 - acc: 0.9980 - val_loss: 0.0124 - val_acc: 0.9961
0.000025905700  Epoch 61/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0050 - acc: 0.9981 - val_loss: 0.0097 - val_acc: 0.9941
0.000024869500  Epoch 62/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0049 - acc: 0.9980 - val_loss: 0.0097 - val_acc: 0.9961
0.000023874700  Epoch 63/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0050 - acc: 0.9979 - val_loss: 0.0130 - val_acc: 0.9922
0.000022919700  Epoch 64/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0052 - acc: 0.9980 - val_loss: 0.0120 - val_acc: 0.9922
0.000022002900  Epoch 65/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0052 - acc: 0.9982 - val_loss: 0.0094 - val_acc: 0.9961
0.000021122800  Epoch 66/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0051 - acc: 0.9982 - val_loss: 0.0098 - val_acc: 0.9941
0.000020277900  Epoch 67/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0049 - acc: 0.9981 - val_loss: 0.0113 - val_acc: 0.9941
0.000019466800  Epoch 68/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0047 - acc: 0.9981 - val_loss: 0.0123 - val_acc: 0.9941
0.000018688100  Epoch 69/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0048 - acc: 0.9982 - val_loss: 0.0117 - val_acc: 0.9941
0.000017940600  Epoch 70/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0047 - acc: 0.9980 - val_loss: 0.0109 - val_acc: 0.9941
0.000017223000  Epoch 71/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0042 - acc: 0.9983 - val_loss: 0.0093 - val_acc: 0.9941
0.000016534000  Epoch 72/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0043 - acc: 0.9982 - val_loss: 0.0106 - val_acc: 0.9941
0.000015872700  Epoch 73/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0042 - acc: 0.9983 - val_loss: 0.0087 - val_acc: 0.9941
0.000015237800  Epoch 74/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0045 - acc: 0.9981 - val_loss: 0.0078 - val_acc: 0.9941
0.000014628300  Epoch 75/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0043 - acc: 0.9983 - val_loss: 0.0089 - val_acc: 0.9941
0.000014043100  Epoch 76/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0041 - acc: 0.9983 - val_loss: 0.0104 - val_acc: 0.9941
0.000013481400  Epoch 77/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0042 - acc: 0.9983 - val_loss: 0.0091 - val_acc: 0.9941
0.000012942100  Epoch 78/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0042 - acc: 0.9982 - val_loss: 0.0139 - val_acc: 0.9922
0.000012424500  Epoch 79/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0040 - acc: 0.9983 - val_loss: 0.0101 - val_acc: 0.9941
0.000011927500  Epoch 80/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0041 - acc: 0.9982 - val_loss: 0.0085 - val_acc: 0.9941
0.000011450400  Epoch 81/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0040 - acc: 0.9984 - val_loss: 0.0100 - val_acc: 0.9941
0.000010992400  Epoch 82/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0039 - acc: 0.9983 - val_loss: 0.0074 - val_acc: 0.9961
0.000010552700  Epoch 83/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0039 - acc: 0.9981 - val_loss: 0.0102 - val_acc: 0.9941
0.000010130600  Epoch 84/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0038 - acc: 0.9983 - val_loss: 0.0094 - val_acc: 0.9941
0.000009725300  Epoch 85/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0037 - acc: 0.9982 - val_loss: 0.0110 - val_acc: 0.9941
0.000009336300  Epoch 86/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0040 - acc: 0.9983 - val_loss: 0.0074 - val_acc: 0.9941
0.000008962900  Epoch 87/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0041 - acc: 0.9981 - val_loss: 0.0069 - val_acc: 0.9941
0.000008604400  Epoch 88/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0041 - acc: 0.9981 - val_loss: 0.0099 - val_acc: 0.9941
0.000008260200  Epoch 89/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0037 - acc: 0.9983 - val_loss: 0.0086 - val_acc: 0.9961
0.000007929800  Epoch 90/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0039 - acc: 0.9981 - val_loss: 0.0086 - val_acc: 0.9941
0.000007612600  Epoch 91/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0034 - acc: 0.9983 - val_loss: 0.0083 - val_acc: 0.9941
0.000007308100  Epoch 92/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0040 - acc: 0.9983 - val_loss: 0.0076 - val_acc: 0.9941
0.000007015800  Epoch 93/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0037 - acc: 0.9982 - val_loss: 0.0078 - val_acc: 0.9961
0.000006735100  Epoch 94/300
30290/30290 [==============================] - 103s 3ms/step - loss: 0.0038 - acc: 0.9983 - val_loss: 0.0087 - val_acc: 0.9941
0.000006465700  Epoch 95/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0037 - acc: 0.9983 - val_loss: 0.0102 - val_acc: 0.9941
0.000006207100  Epoch 96/300
30290/30290 [==============================] - 103s 3ms/step - loss: 0.0039 - acc: 0.9981 - val_loss: 0.0095 - val_acc: 0.9941
0.000005958800  Epoch 97/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9985 - val_loss: 0.0086 - val_acc: 0.9941
0.000005720500  Epoch 98/300
30290/30290 [==============================] - 103s 3ms/step - loss: 0.0036 - acc: 0.9983 - val_loss: 0.0086 - val_acc: 0.9941
0.000005491600  Epoch 99/300
30290/30290 [==============================] - 103s 3ms/step - loss: 0.0033 - acc: 0.9985 - val_loss: 0.0088 - val_acc: 0.9941
0.000005272000  Epoch 100/300
30290/30290 [==============================] - 103s 3ms/step - loss: 0.0033 - acc: 0.9985 - val_loss: 0.0086 - val_acc: 0.9941
0.000005061100  Epoch 101/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0039 - acc: 0.9982 - val_loss: 0.0090 - val_acc: 0.9941
0.000004858700  Epoch 102/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0038 - acc: 0.9981 - val_loss: 0.0079 - val_acc: 0.9941
0.000004664300  Epoch 103/300
30290/30290 [==============================] - 103s 3ms/step - loss: 0.0034 - acc: 0.9985 - val_loss: 0.0085 - val_acc: 0.9941
0.000004477700  Epoch 104/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0036 - acc: 0.9982 - val_loss: 0.0083 - val_acc: 0.9941
0.000004298600  Epoch 105/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0037 - acc: 0.9981 - val_loss: 0.0076 - val_acc: 0.9941
0.000004126700  Epoch 106/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0034 - acc: 0.9983 - val_loss: 0.0077 - val_acc: 0.9941
0.000003961600  Epoch 107/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0034 - acc: 0.9982 - val_loss: 0.0079 - val_acc: 0.9941
0.000003803100  Epoch 108/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0036 - acc: 0.9983 - val_loss: 0.0076 - val_acc: 0.9941
0.000003651000  Epoch 109/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0037 - acc: 0.9980 - val_loss: 0.0073 - val_acc: 0.9941
0.000003505000  Epoch 110/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9986 - val_loss: 0.0083 - val_acc: 0.9941
0.000003364800  Epoch 111/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9986 - val_loss: 0.0078 - val_acc: 0.9941
0.000003230200  Epoch 112/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0077 - val_acc: 0.9941
0.000003101000  Epoch 113/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9982 - val_loss: 0.0081 - val_acc: 0.9941
0.000002976900  Epoch 114/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000002857900  Epoch 115/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9985 - val_loss: 0.0081 - val_acc: 0.9941
0.000002743600  Epoch 116/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0035 - acc: 0.9983 - val_loss: 0.0082 - val_acc: 0.9941
0.000002633800  Epoch 117/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9985 - val_loss: 0.0083 - val_acc: 0.9941
0.000002528500  Epoch 118/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9984 - val_loss: 0.0081 - val_acc: 0.9941
0.000002427300  Epoch 119/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9984 - val_loss: 0.0084 - val_acc: 0.9941
0.000002330200  Epoch 120/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0079 - val_acc: 0.9941
0.000002237000  Epoch 121/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0082 - val_acc: 0.9941
0.000002147500  Epoch 122/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0035 - acc: 0.9983 - val_loss: 0.0086 - val_acc: 0.9941
0.000002061600  Epoch 123/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0084 - val_acc: 0.9941
0.000001979200  Epoch 124/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9984 - val_loss: 0.0078 - val_acc: 0.9941
0.000001900000  Epoch 125/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9982 - val_loss: 0.0080 - val_acc: 0.9941
0.000001824000  Epoch 126/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9986 - val_loss: 0.0076 - val_acc: 0.9941
0.000001751000  Epoch 127/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9986 - val_loss: 0.0077 - val_acc: 0.9941
0.000001681000  Epoch 128/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9983 - val_loss: 0.0079 - val_acc: 0.9941
0.000001613800  Epoch 129/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9984 - val_loss: 0.0077 - val_acc: 0.9941
0.000001549200  Epoch 130/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9983 - val_loss: 0.0074 - val_acc: 0.9941
0.000001487200  Epoch 131/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9988 - val_loss: 0.0077 - val_acc: 0.9941
0.000001427800  Epoch 132/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0034 - acc: 0.9982 - val_loss: 0.0078 - val_acc: 0.9941
0.000001370600  Epoch 133/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0073 - val_acc: 0.9941
0.000001315800  Epoch 134/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0072 - val_acc: 0.9941
0.000001263200  Epoch 135/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9984 - val_loss: 0.0075 - val_acc: 0.9941
0.000001212700  Epoch 136/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9984 - val_loss: 0.0075 - val_acc: 0.9941
0.000001164200  Epoch 137/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9986 - val_loss: 0.0078 - val_acc: 0.9941
0.000001117600  Epoch 138/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0036 - acc: 0.9983 - val_loss: 0.0078 - val_acc: 0.9941
0.000001072900  Epoch 139/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9984 - val_loss: 0.0077 - val_acc: 0.9941
0.000001030000  Epoch 140/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0077 - val_acc: 0.9941
0.000000988800  Epoch 141/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0077 - val_acc: 0.9941
0.000000949200  Epoch 142/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0076 - val_acc: 0.9941
0.000000911200  Epoch 143/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9984 - val_loss: 0.0078 - val_acc: 0.9941
0.000000874800  Epoch 144/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9983 - val_loss: 0.0079 - val_acc: 0.9941
0.000000839800  Epoch 145/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0078 - val_acc: 0.9941
0.000000806200  Epoch 146/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9983 - val_loss: 0.0077 - val_acc: 0.9941
0.000000774000  Epoch 147/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9984 - val_loss: 0.0078 - val_acc: 0.9941
0.000000743000  Epoch 148/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9983 - val_loss: 0.0077 - val_acc: 0.9941
0.000000713300  Epoch 149/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9982 - val_loss: 0.0078 - val_acc: 0.9941
0.000000684800  Epoch 150/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9986 - val_loss: 0.0077 - val_acc: 0.9941
0.000000657400  Epoch 151/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0077 - val_acc: 0.9941
0.000000631100  Epoch 152/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9985 - val_loss: 0.0079 - val_acc: 0.9941
0.000000605800  Epoch 153/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9984 - val_loss: 0.0079 - val_acc: 0.9941
0.000000581600  Epoch 154/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000558300  Epoch 155/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9986 - val_loss: 0.0081 - val_acc: 0.9941
0.000000536000  Epoch 156/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9984 - val_loss: 0.0081 - val_acc: 0.9941
0.000000514600  Epoch 157/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000494000  Epoch 158/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9984 - val_loss: 0.0081 - val_acc: 0.9941
0.000000474200  Epoch 159/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9984 - val_loss: 0.0082 - val_acc: 0.9941
0.000000455200  Epoch 160/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9987 - val_loss: 0.0081 - val_acc: 0.9941
0.000000437000  Epoch 161/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0035 - acc: 0.9982 - val_loss: 0.0081 - val_acc: 0.9941
0.000000419600  Epoch 162/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9986 - val_loss: 0.0082 - val_acc: 0.9941
0.000000402800  Epoch 163/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9986 - val_loss: 0.0084 - val_acc: 0.9941
0.000000386700  Epoch 164/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0083 - val_acc: 0.9941
0.000000371200  Epoch 165/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9985 - val_loss: 0.0083 - val_acc: 0.9941
0.000000356300  Epoch 166/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9984 - val_loss: 0.0083 - val_acc: 0.9941
0.000000342100  Epoch 167/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9983 - val_loss: 0.0083 - val_acc: 0.9941
0.000000328400  Epoch 168/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9985 - val_loss: 0.0083 - val_acc: 0.9941
0.000000315300  Epoch 169/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9986 - val_loss: 0.0083 - val_acc: 0.9941
0.000000302700  Epoch 170/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9986 - val_loss: 0.0083 - val_acc: 0.9941
0.000000290600  Epoch 171/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9983 - val_loss: 0.0082 - val_acc: 0.9941
0.000000278900  Epoch 172/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0081 - val_acc: 0.9941
0.000000267800  Epoch 173/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9986 - val_loss: 0.0082 - val_acc: 0.9941
0.000000257100  Epoch 174/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000246800  Epoch 175/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0081 - val_acc: 0.9941
0.000000236900  Epoch 176/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000227400  Epoch 177/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0027 - acc: 0.9987 - val_loss: 0.0081 - val_acc: 0.9941
0.000000218300  Epoch 178/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9988 - val_loss: 0.0081 - val_acc: 0.9941
0.000000209600  Epoch 179/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9986 - val_loss: 0.0081 - val_acc: 0.9941
0.000000201200  Epoch 180/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9987 - val_loss: 0.0081 - val_acc: 0.9941
0.000000193200  Epoch 181/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9985 - val_loss: 0.0081 - val_acc: 0.9941
0.000000185400  Epoch 182/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9984 - val_loss: 0.0081 - val_acc: 0.9941
0.000000178000  Epoch 183/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0081 - val_acc: 0.9941
0.000000170900  Epoch 184/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0027 - acc: 0.9987 - val_loss: 0.0081 - val_acc: 0.9941
0.000000164100  Epoch 185/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9984 - val_loss: 0.0081 - val_acc: 0.9941
0.000000157500  Epoch 186/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9984 - val_loss: 0.0081 - val_acc: 0.9941
0.000000151200  Epoch 187/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0027 - acc: 0.9987 - val_loss: 0.0081 - val_acc: 0.9941
0.000000145200  Epoch 188/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0081 - val_acc: 0.9941
0.000000139400  Epoch 189/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9984 - val_loss: 0.0081 - val_acc: 0.9941
0.000000133800  Epoch 190/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9984 - val_loss: 0.0081 - val_acc: 0.9941
0.000000128400  Epoch 191/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9986 - val_loss: 0.0081 - val_acc: 0.9941
0.000000123300  Epoch 192/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9983 - val_loss: 0.0081 - val_acc: 0.9941
0.000000118400  Epoch 193/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0027 - acc: 0.9987 - val_loss: 0.0081 - val_acc: 0.9941
0.000000113600  Epoch 194/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9986 - val_loss: 0.0081 - val_acc: 0.9941
0.000000109100  Epoch 195/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9983 - val_loss: 0.0081 - val_acc: 0.9941
0.000000104700  Epoch 196/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000100500  Epoch 197/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000096500  Epoch 198/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000092600  Epoch 199/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9986 - val_loss: 0.0081 - val_acc: 0.9941
0.000000088900  Epoch 200/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9985 - val_loss: 0.0081 - val_acc: 0.9941
0.000000085400  Epoch 201/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000082000  Epoch 202/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000078700  Epoch 203/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000075500  Epoch 204/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9983 - val_loss: 0.0080 - val_acc: 0.9941
0.000000072500  Epoch 205/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9983 - val_loss: 0.0080 - val_acc: 0.9941
0.000000069600  Epoch 206/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000066800  Epoch 207/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000064200  Epoch 208/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0027 - acc: 0.9988 - val_loss: 0.0081 - val_acc: 0.9941
0.000000061600  Epoch 209/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9988 - val_loss: 0.0081 - val_acc: 0.9941
0.000000059100  Epoch 210/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9985 - val_loss: 0.0081 - val_acc: 0.9941
0.000000056800  Epoch 211/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9984 - val_loss: 0.0081 - val_acc: 0.9941
0.000000054500  Epoch 212/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9987 - val_loss: 0.0081 - val_acc: 0.9941
0.000000052300  Epoch 213/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9983 - val_loss: 0.0081 - val_acc: 0.9941
0.000000050200  Epoch 214/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9983 - val_loss: 0.0081 - val_acc: 0.9941
0.000000048200  Epoch 215/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000046300  Epoch 216/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000044400  Epoch 217/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000042700  Epoch 218/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000040900  Epoch 219/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9983 - val_loss: 0.0080 - val_acc: 0.9941
0.000000039300  Epoch 220/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000037700  Epoch 221/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000036200  Epoch 222/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0026 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000034800  Epoch 223/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000033400  Epoch 224/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000032100  Epoch 225/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000030800  Epoch 226/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000029500  Epoch 227/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9983 - val_loss: 0.0080 - val_acc: 0.9941
0.000000028400  Epoch 228/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000027200  Epoch 229/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000026100  Epoch 230/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000025100  Epoch 231/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0026 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000024100  Epoch 232/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0027 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000023100  Epoch 233/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000022200  Epoch 234/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000021300  Epoch 235/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9983 - val_loss: 0.0080 - val_acc: 0.9941
0.000000020500  Epoch 236/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9988 - val_loss: 0.0080 - val_acc: 0.9941
0.000000019600  Epoch 237/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000018900  Epoch 238/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000018100  Epoch 239/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9983 - val_loss: 0.0080 - val_acc: 0.9941
0.000000017400  Epoch 240/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000016700  Epoch 241/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000016000  Epoch 242/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000015400  Epoch 243/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000014800  Epoch 244/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000014200  Epoch 245/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000013600  Epoch 246/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0027 - acc: 0.9988 - val_loss: 0.0080 - val_acc: 0.9941
0.000000013100  Epoch 247/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000012500  Epoch 248/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000012000  Epoch 249/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9982 - val_loss: 0.0080 - val_acc: 0.9941
0.000000011600  Epoch 250/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0027 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000011100  Epoch 251/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010600  Epoch 252/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010200  Epoch 253/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 254/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0027 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 255/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 256/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 257/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 258/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9983 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 259/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 260/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9982 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 261/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 262/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 263/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 264/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 265/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 266/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 267/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 268/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 269/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 270/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9983 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 271/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 272/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 273/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9982 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 274/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 275/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 276/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 277/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 278/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 279/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 280/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 281/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0025 - acc: 0.9989 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 282/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0026 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 283/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 284/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0031 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 285/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 286/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9984 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 287/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 288/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9987 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 289/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0028 - acc: 0.9988 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 290/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0026 - acc: 0.9989 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 291/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 292/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0027 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 293/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 294/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0033 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 295/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 296/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 297/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0029 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 298/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0027 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 299/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0030 - acc: 0.9985 - val_loss: 0.0080 - val_acc: 0.9941
0.000000010000  Epoch 300/300
30290/30290 [==============================] - 102s 3ms/step - loss: 0.0032 - acc: 0.9986 - val_loss: 0.0080 - val_acc: 0.9941

In [12]:
# Evaluate the trained head on the held-out validation features.
val_metrics = model.evaluate(x_val, y_val, verbose=1)
final_loss, final_acc = val_metrics
print("Final loss: {0:.4f}, final accuracy: {1:.4f}".format(final_loss, final_acc))


512/512 [==============================] - 0s 820us/step
Final loss: 0.0080, final accuracy: 0.9941

In [13]:
run_name_acc = run_name + '_' + str(int(final_loss*10000)).zfill(4)

In [14]:
# Persist the per-epoch training history (loss/acc curves) as a CSV
# next to the model weights so runs can be compared later.
histories = pd.DataFrame(hist.history).assign(epoch=hist.epoch)
print(histories.columns)
histories_file = os.path.join(model_path, '{}.csv'.format(run_name_acc))
histories.to_csv(histories_file, index=False)


Index(['acc', 'loss', 'val_acc', 'val_loss', 'epoch'], dtype='object')

In [15]:
import matplotlib.pyplot as plt
%matplotlib inline

plt.plot(hist.history['loss'], color='b')
plt.plot(hist.history['val_loss'], color='r')
plt.show()
plt.plot(hist.history['acc'], color='b')
plt.plot(hist.history['val_acc'], color='r')
plt.show()



In [16]:
def saveModel(model, run_name):
    """Save a trained Keras model to ``<cwd>/model/<run_name>.h5``.

    Creates the model directory if it does not exist and returns the
    path the model was written to (the original returned None; callers
    that ignore the return value are unaffected).
    """
    cwd = os.getcwd()
    modelPath = os.path.join(cwd, 'model')
    # makedirs(exist_ok=True) avoids the isdir()/mkdir() race of the
    # original check-then-create sequence.
    os.makedirs(modelPath, exist_ok=True)
    weightsFile = os.path.join(modelPath, run_name + '.h5')  # fixed 'weigths' typo
    model.save(weightsFile)
    return weightsFile
saveModel(model, run_name_acc)

Predict


In [17]:
# Used to load model directly and skip train
# import os
# from keras.models import load_model
# cwd = os.getcwd()
# model = load_model(os.path.join(cwd, 'model', 'Dog_Breed_Identification_Train_20171024_155154.h5'))

In [18]:
# Predict breed probabilities for every test sample; batch_size only
# affects throughput, not the result.
y_pred = model.predict(x_test, batch_size=128)
# Sanity check: (num_test_images, num_breeds) — matches (10357, 120) below.
print(y_pred.shape)


(10357, 120)

In [19]:
# print(y_pred[:10])
# y_pred = np.clip(y_pred, 0.005, 0.995)
# print(y_pred[:10])

In [20]:
# List the raw test image filenames.
# NOTE(review): os.listdir order is filesystem-dependent; the printed
# sample happens to look sorted here, but nothing downstream should
# rely on this ordering — confirm if `files` is ever reused.
files = os.listdir(os.path.join(cwd, 'input', 'data_test', 'test'))
print(files[:10])


['000621fb3cbb32d8935728e48679680e.jpg', '00102ee9d8eb90812350685311fe5890.jpg', '0012a730dfa437f5f3613fb75efcd4ce.jpg', '001510bc8570bbeee98c8d80c8a95ec1.jpg', '001a5f3114548acdefa3d4da05474c2e.jpg', '00225dcd3e4d2410dd53239f95c0352f.jpg', '002c2a3117c2193b4d26400ce431eebd.jpg', '002c58d413a521ae8d1a5daeb35fc803.jpg', '002f80396f1e3db687c5932d7978b196.jpg', '0036c6bcec6031be9e62a257b1c3c442.jpg']

In [21]:
cwd = os.getcwd()
# Reload the training labels so the breed list can be rebuilt for the
# prediction phase.
df = pd.read_csv(os.path.join(cwd, 'input', 'labels.csv'))
print('labels amount: %d' % len(df))  # fixed: 'lables' -> 'labels'
df.head()


lables amount: 10222
Out[21]:
id breed
0 000bec180eb18c7604dcecc8fe0dba07 boston_bull
1 001513dfcb2ffafc82cccf4d8bbaba97 dingo
2 001cdf01b096e06d78e9e5112d419397 pekinese
3 00214f311d5d2247d5dfe4fe24b2303d bluetick
4 0021f9ceb3235effd7fcde7f7538ed62 golden_retriever

In [22]:
# Build breed <-> class-index lookup tables from the label file.
n = len(df)
breed = set(df['breed'])
n_class = len(breed)
# NOTE(review): iterating a set yields a hash-dependent order, so
# class_to_num / num_to_class are NOT stable across interpreter runs
# (PYTHONHASHSEED). This is only consistent with the training-time
# label encoding if that encoding was built the same way in this same
# kernel session — consider sorted(breed) in both places. TODO confirm.
class_to_num = dict(zip(breed, range(n_class)))
num_to_class = dict(zip(range(n_class), breed))
print(breed)


{'miniature_schnauzer', 'wire-haired_fox_terrier', 'norfolk_terrier', 'bluetick', 'sealyham_terrier', 'norwich_terrier', 'gordon_setter', 'miniature_poodle', 'bernese_mountain_dog', 'pug', 'dhole', 'toy_terrier', 'toy_poodle', 'tibetan_terrier', 'siberian_husky', 'keeshond', 'blenheim_spaniel', 'chihuahua', 'whippet', 'basenji', 'great_pyrenees', 'bedlington_terrier', 'west_highland_white_terrier', 'rhodesian_ridgeback', 'borzoi', 'entlebucher', 'kerry_blue_terrier', 'chesapeake_bay_retriever', 'brabancon_griffon', 'english_springer', 'irish_water_spaniel', 'french_bulldog', 'standard_poodle', 'cardigan', 'soft-coated_wheaten_terrier', 'scottish_deerhound', 'flat-coated_retriever', 'silky_terrier', 'black-and-tan_coonhound', 'german_short-haired_pointer', 'brittany_spaniel', 'weimaraner', 'mexican_hairless', 'pekinese', 'boxer', 'shih-tzu', 'welsh_springer_spaniel', 'saluki', 'cairn', 'schipperke', 'pomeranian', 'doberman', 'sussex_spaniel', 'scotch_terrier', 'english_setter', 'african_hunting_dog', 'otterhound', 'clumber', 'japanese_spaniel', 'malamute', 'affenpinscher', 'great_dane', 'yorkshire_terrier', 'beagle', 'australian_terrier', 'maltese_dog', 'tibetan_mastiff', 'standard_schnauzer', 'rottweiler', 'saint_bernard', 'greater_swiss_mountain_dog', 'eskimo_dog', 'dingo', 'miniature_pinscher', 'bull_mastiff', 'lakeland_terrier', 'papillon', 'irish_setter', 'basset', 'old_english_sheepdog', 'boston_bull', 'leonberg', 'staffordshire_bullterrier', 'pembroke', 'irish_terrier', 'kuvasz', 'cocker_spaniel', 'chow', 'border_terrier', 'malinois', 'curly-coated_retriever', 'border_collie', 'bloodhound', 'newfoundland', 'ibizan_hound', 'norwegian_elkhound', 'redbone', 'walker_hound', 'bouvier_des_flandres', 'italian_greyhound', 'giant_schnauzer', 'airedale', 'dandie_dinmont', 'briard', 'afghan_hound', 'golden_retriever', 'samoyed', 'german_shepherd', 'american_staffordshire_terrier', 'lhasa', 'english_foxhound', 'appenzeller', 'vizsla', 'irish_wolfhound', 
'labrador_retriever', 'collie', 'kelpie', 'shetland_sheepdog', 'groenendael', 'komondor'}

In [23]:
# Load the Kaggle sample submission so its 'id' column and breed
# column layout can be reused for the output file.
# Fixed: os.path.join instead of the hard-coded Windows '.\' path,
# consistent with every other path built in this notebook.
df2 = pd.read_csv(os.path.join(cwd, 'input', 'sample_submission.csv'))
n_test = len(df2)
print(df2.shape)


(10357, 121)

In [24]:
print(y_pred.shape)


(10357, 120)

In [25]:
# Copy the predicted probabilities into the submission frame: columns
# 1..120 are the breed columns (column 0 is 'id'). One vectorized
# assignment replaces the original per-column Python loop.
# NOTE(review): this assumes y_pred's column order matches the
# submission's breed column order — verify against class_to_num,
# which is built from an unordered set above. TODO confirm.
df2.iloc[:, 1:] = y_pred
# Create the output directory if needed (race-free).
os.makedirs(output_path, exist_ok=True)
pred_file = os.path.join(output_path, 'pred_' + run_name_acc + '.csv')
# index=False (explicit) — index=None was treated as falsy by pandas
# anyway; Kaggle submissions must not contain the index column.
df2.to_csv(pred_file, index=False)

In [ ]:


In [26]:
# Echo the final run identifier for bookkeeping, then mark completion.
print(run_name_acc)
print('Done !')


Dog_Breed_Identification_Train-Predict_20180223_134540_0080
Done !

In [ ]: